In [1]:
import glob
import math
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import random
import sklearn.metrics as metrics

from tensorflow.keras import optimizers
from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import add, concatenate, Conv2D, Dense, Dropout, Flatten, Input
from tensorflow.keras.layers import Activation, AveragePooling2D, BatchNormalization, MaxPooling2D
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical


%matplotlib inline
In [2]:
                            # Set up 'ggplot' style
plt.style.use('ggplot')     # if want to use the default style, set 'classic'
plt.rcParams['ytick.right']     = True
plt.rcParams['ytick.labelright']= True
plt.rcParams['ytick.left']      = False
plt.rcParams['ytick.labelleft'] = False
plt.rcParams['font.family']     = 'Arial'
In [3]:
# where am i? — show the current working directory (the relative ./data
# paths below presumably expect the ImageNet project folder; see Out[3])
%pwd
Out[3]:
'C:\\Users\\david\\Documents\\ImageNet'
In [4]:
# Collect the file paths for each image category under ./data.
flowers = glob.glob('./data/flr_*.jpg')
fungus = glob.glob('./data/fgs_*.jpg')
rocks = glob.glob('./data/rck_*.jpg')

pixel_flowers = glob.glob('./data/pxl_flower_*.jpeg')
pixel_umbrella = glob.glob('./data/pxl_umbrella_*.jpeg')

# Bug fix: the original format string omitted the "pixel flower" label, so
# the output read "There are 1269, 1792 flower ..." with an unlabelled count.
print("There are %s flower, %s pixel-flower, %s fungus, %s rock and %s pixel-umbrella pictures"
      % (len(flowers), len(pixel_flowers), len(fungus), len(rocks), len(pixel_umbrella)))
There are 1269, 1792 flower, 856 fungus, 1007 rock and 420 umbrella pictures
In [5]:
# Randomly show 5 examples of the images from the chosen dataset
from IPython.display import Image

dataset = flowers   # switch to fungus / rocks to preview those sets

for _ in range(5):
    pick = random.randint(0, len(dataset) - 1)
    print("Showing:", dataset[pick])

    shown = plt.imshow(mpimg.imread(dataset[pick]))
    plt.show()

#Image(dataset[pick])
Showing: ./data\flr_00379.jpg
Showing: ./data\flr_01794.jpg
Showing: ./data\flr_01729.jpg
Showing: ./data\flr_00978.jpg
Showing: ./data\flr_01767.jpg

Extract the training and testing datasets

In [6]:
# Load the pre-split flower/non-flower arrays (96x96 images, 80/20 split)
def _load_split(split, kind):
    """Read 'arr_0' from the matching .npz archive in the working directory."""
    return np.load('flrnonflr-%s-%s96-0.8.npz' % (split, kind))['arr_0']

trDatOrg = _load_split('train', 'imgs')
trLblOrg = _load_split('train', 'labels')
tsDatOrg = _load_split('test', 'imgs')
tsLblOrg = _load_split('test', 'labels')
In [7]:
# Report the array shapes of the train/test images and labels
shapes = (trDatOrg.shape, trLblOrg.shape, tsDatOrg.shape, tsLblOrg.shape)
print("For the training and test datasets:")
print("The shapes are %s, %s, %s, %s" % shapes)
For the training and test datasets:
The shapes are (4264, 96, 96, 3), (4264,), (1067, 96, 96, 3), (1067,)
In [8]:
# Randomly show 20 examples from the test set together with their labels

data = tsDatOrg
label = tsLblOrg

for _ in range(20):
    pick = random.randint(0, len(data) - 1)
    print("Showing %s index image, It is %s" % (pick, label[pick]))
    plt.imshow(data[pick])
    plt.show()
Showing 562 index image, It is 1.0
Showing 585 index image, It is 1.0
Showing 780 index image, It is 0.0
Showing 172 index image, It is 1.0
Showing 1044 index image, It is 0.0
Showing 545 index image, It is 1.0
Showing 281 index image, It is 1.0
Showing 421 index image, It is 1.0
Showing 141 index image, It is 1.0
Showing 772 index image, It is 0.0
Showing 133 index image, It is 1.0
Showing 896 index image, It is 0.0
Showing 451 index image, It is 1.0
Showing 915 index image, It is 0.0
Showing 376 index image, It is 1.0
Showing 186 index image, It is 1.0
Showing 354 index image, It is 1.0
Showing 40 index image, It is 1.0
Showing 1048 index image, It is 0.0
Showing 982 index image, It is 0.0
In [9]:
# Cast the pixel data to 'float32' and rescale from 0~255 down to 0~1
trDat = trDatOrg.astype('float32') / 255
tsDat = tsDatOrg.astype('float32') / 255

# Image geometry: row size, column size and colour channels
imgrows = trDat.shape[1]
imgclms = trDat.shape[2]
channel = 3

# (The arrays already arrive as [samples][rows][cols][channel], the
#  layout Keras expects, so no reshape is needed here.)

# One-hot encode the labels and derive the number of classes from them
trLbl = to_categorical(trLblOrg)
tsLbl = to_categorical(tsLblOrg)
num_classes = tsLbl.shape[1]
In [12]:
# fix random seed for reproducibility of weight init / shuffling
seed = 29
np.random.seed(seed)


# Base name used for checkpoint / log files later in the notebook
modelname = 'FlowerPower'

def createBaselineModel():
    """Build and compile a small CNN baseline: two conv/pool stages,
    dropout, then a dense softmax head over `num_classes` classes."""
    inp = Input(shape=(imgrows, imgclms, channel))

    net = Conv2D(30, (4, 4), activation='relu')(inp)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Conv2D(50, (4, 4), activation='relu')(net)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Dropout(0.3)(net)

    net = Flatten()(net)
    net = Dense(32, activation='relu')(net)
    net = Dense(num_classes, activation='softmax')(net)

    mdl = Model(inputs=[inp], outputs=net)
    mdl.compile(loss='categorical_crossentropy',
                optimizer='adam',
                metrics=['accuracy'])
    return mdl

def resLyr(inputs,
           numFilters=16,
           kernelSz=3,
           strides=1,
           activation='relu',
           batchNorm=True,
           convFirst=True,
           lyrName=None):
    """
    One conv / batch-norm / activation unit of a residual layer.

    With convFirst=True the order is conv -> BN -> activation; otherwise
    BN -> activation -> conv (pre-activation order). The BN and activation
    stages are optional. `lyrName`, when given, prefixes the generated
    Keras layer names.
    """
    def _name(suffix):
        # Derive a sub-layer name only when a base name was supplied.
        return lyrName + suffix if lyrName else None

    convLyr = Conv2D(numFilters,
                     kernel_size=kernelSz,
                     strides=strides,
                     padding='same',
                     kernel_initializer='he_normal',
                     kernel_regularizer=l2(1e-4),
                     name=_name('_conv'))

    x = inputs
    if convFirst:
        x = convLyr(x)
    if batchNorm:
        x = BatchNormalization(name=_name('_bn'))(x)
    if activation is not None:
        x = Activation(activation, name=_name('_' + activation))(x)
    if not convFirst:
        x = convLyr(x)
    return x


def resBlkV1(inputs,
             numFilters=16,
             numBlocks=3,
             downsampleOnFirst=True,
             names=None):
    """
    Stack of ResNet-v1 residual blocks.

    Each block applies two resLyr units and adds an identity shortcut.
    When downsampleOnFirst is True, the first block uses stride 2 and a
    1x1 linear conv on the shortcut so the shapes match.
    """
    x = inputs
    for blk in range(numBlocks):
        tag = str(blk + 1)
        downsample = downsampleOnFirst and blk == 0
        strides = 2 if downsample else 1

        def _name(suffix):
            # Name like '<names>_Blk<n><suffix>', or None when unnamed.
            return names + '_Blk' + tag + suffix if names else None

        y = resLyr(inputs=x, numFilters=numFilters, strides=strides,
                   lyrName=_name('_Res1'))
        y = resLyr(inputs=y, numFilters=numFilters, activation=None,
                   lyrName=_name('_Res2'))
        if downsample:
            # Project the shortcut with a 1x1 linear conv to the new shape.
            x = resLyr(inputs=x, numFilters=numFilters, kernelSz=1,
                       strides=strides, activation=None, batchNorm=False,
                       lyrName=_name('_lin'))
        x = add([x, y], name=_name('_add'))
        x = Activation('relu', name=_name('_relu'))(x)
    return x

# Optimizer shared by the ResNet builder below; Adam was tried and disabled.
#optmz = optimizers.Adam(lr=0.001)
optmz = optimizers.RMSprop(lr=0.001)  # NOTE(review): `lr` is deprecated in newer tf.keras in favour of `learning_rate` — confirm the installed TF version still accepts it

def createResNetV1(inputShape=(imgrows, imgclms, channel),
                   numClasses=2):
    """
    Build and compile a 6-stage ResNet-v1 classifier.

    Stage filter widths are 16, 32, 64, 128, 128, 256; stages 2, 3, 4
    and 6 downsample on their first block. Dropout follows every stage,
    then 6x6 average pooling feeds a softmax head of `numClasses` units.
    """
    inputs = Input(shape=inputShape)
    v = resLyr(inputs, lyrName='Inpt')

    # (filters, downsample-on-first-block, dropout rate) for each stage
    stages = [(16,  False, 0.30),
              (32,  True,  0.40),
              (64,  True,  0.50),
              (128, True,  0.50),
              (128, False, 0.50),
              (256, True,  0.50)]
    for stage, (filters, downsample, dropRate) in enumerate(stages, start=1):
        v = resBlkV1(inputs=v, numFilters=filters, numBlocks=3,
                     downsampleOnFirst=downsample,
                     names='Stg%d' % stage)
        v = Dropout(dropRate)(v)

    v = AveragePooling2D(pool_size=6, name='AvgPool')(v)
    v = Flatten()(v)
    outputs = Dense(numClasses, activation='softmax',
                    kernel_initializer='he_normal')(v)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optmz,
                  metrics=['accuracy'])
    return model



# Setup the models — two identical ResNet instances: `model` is trained;
# `modelGo` presumably receives the checkpointed weights for final
# evaluation (pattern continues beyond this chunk — confirm below).
model       = createResNetV1() # This is meant for training
modelGo     = createResNetV1() # This is used for final testing

model.summary()
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_3 (InputLayer)            (None, 96, 96, 3)    0                                            
__________________________________________________________________________________________________
Inpt_conv (Conv2D)              (None, 96, 96, 16)   448         input_3[0][0]                    
__________________________________________________________________________________________________
Inpt_bn (BatchNormalization)    (None, 96, 96, 16)   64          Inpt_conv[0][0]                  
__________________________________________________________________________________________________
Inpt_relu (Activation)          (None, 96, 96, 16)   0           Inpt_bn[0][0]                    
__________________________________________________________________________________________________
Stg1_Blk1_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Inpt_relu[0][0]                  
__________________________________________________________________________________________________
Stg1_Blk1_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_add (Add)             (None, 96, 96, 16)   0           Inpt_relu[0][0]                  
                                                                 Stg1_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk2_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk2_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk1_relu[0][0]             
                                                                 Stg1_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk3_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk3_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk2_relu[0][0]             
                                                                 Stg1_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_12 (Dropout)            (None, 96, 96, 16)   0           Stg1_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk1_Res1_conv (Conv2D)    (None, 48, 48, 32)   4640        dropout_12[0][0]                 
__________________________________________________________________________________________________
Stg2_Blk1_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_lin_conv (Conv2D)     (None, 48, 48, 32)   544         dropout_12[0][0]                 
__________________________________________________________________________________________________
Stg2_Blk1_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_lin_conv[0][0]         
                                                                 Stg2_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk2_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk2_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_relu[0][0]             
                                                                 Stg2_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk3_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk3_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk2_relu[0][0]             
                                                                 Stg2_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_13 (Dropout)            (None, 48, 48, 32)   0           Stg2_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk1_Res1_conv (Conv2D)    (None, 24, 24, 64)   18496       dropout_13[0][0]                 
__________________________________________________________________________________________________
Stg3_Blk1_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_lin_conv (Conv2D)     (None, 24, 24, 64)   2112        dropout_13[0][0]                 
__________________________________________________________________________________________________
Stg3_Blk1_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_lin_conv[0][0]         
                                                                 Stg3_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk2_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk2_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_relu[0][0]             
                                                                 Stg3_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk3_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk3_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk2_relu[0][0]             
                                                                 Stg3_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_14 (Dropout)            (None, 24, 24, 64)   0           Stg3_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  73856       dropout_14[0][0]                 
__________________________________________________________________________________________________
Stg4_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_lin_conv (Conv2D)     (None, 12, 12, 128)  8320        dropout_14[0][0]                 
__________________________________________________________________________________________________
Stg4_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk1_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_lin_conv[0][0]         
                                                                 Stg4_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk1_relu[0][0]             
                                                                 Stg4_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg4_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg4_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg4_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg4_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg4_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg4_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg4_Blk2_relu[0][0]             
                                                                 Stg4_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg4_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg4_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_15 (Dropout)            (None, 12, 12, 128)  0           Stg4_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk1_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      dropout_15[0][0]                 
__________________________________________________________________________________________________
Stg5_Blk1_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk1_add (Add)             (None, 12, 12, 128)  0           dropout_15[0][0]                 
                                                                 Stg5_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk1_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk2_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk2_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk2_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk1_relu[0][0]             
                                                                 Stg5_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk2_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg5_Blk3_Res1_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg5_Blk3_Res1_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res1_relu (Activation (None, 12, 12, 128)  0           Stg5_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_Res2_conv (Conv2D)    (None, 12, 12, 128)  147584      Stg5_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_Res2_bn (BatchNormali (None, 12, 12, 128)  512         Stg5_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg5_Blk3_add (Add)             (None, 12, 12, 128)  0           Stg5_Blk2_relu[0][0]             
                                                                 Stg5_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg5_Blk3_relu (Activation)     (None, 12, 12, 128)  0           Stg5_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_16 (Dropout)            (None, 12, 12, 128)  0           Stg5_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk1_Res1_conv (Conv2D)    (None, 6, 6, 256)    295168      dropout_16[0][0]                 
__________________________________________________________________________________________________
Stg6_Blk1_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_lin_conv (Conv2D)     (None, 6, 6, 256)    33024       dropout_16[0][0]                 
__________________________________________________________________________________________________
Stg6_Blk1_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk1_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_lin_conv[0][0]         
                                                                 Stg6_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk1_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk2_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk2_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk2_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk1_relu[0][0]             
                                                                 Stg6_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk2_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg6_Blk3_Res1_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg6_Blk3_Res1_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res1_relu (Activation (None, 6, 6, 256)    0           Stg6_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_Res2_conv (Conv2D)    (None, 6, 6, 256)    590080      Stg6_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_Res2_bn (BatchNormali (None, 6, 6, 256)    1024        Stg6_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg6_Blk3_add (Add)             (None, 6, 6, 256)    0           Stg6_Blk2_relu[0][0]             
                                                                 Stg6_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg6_Blk3_relu (Activation)     (None, 6, 6, 256)    0           Stg6_Blk3_add[0][0]              
__________________________________________________________________________________________________
dropout_17 (Dropout)            (None, 6, 6, 256)    0           Stg6_Blk3_relu[0][0]             
__________________________________________________________________________________________________
AvgPool (AveragePooling2D)      (None, 1, 1, 256)    0           dropout_17[0][0]                 
__________________________________________________________________________________________________
flatten (Flatten)               (None, 256)          0           AvgPool[0][0]                    
__________________________________________________________________________________________________
dense (Dense)                   (None, 2)            514         flatten[0][0]                    
==================================================================================================
Total params: 5,270,786
Trainable params: 5,263,266
Non-trainable params: 7,520
__________________________________________________________________________________________________
In [13]:
# Create checkpoint for the training
# This checkpoint performs model saving when
# an epoch gives highest testing accuracy
# filepath        = modelname + ".hdf5"
# checkpoint      = ModelCheckpoint(filepath, 
#                                   monitor='val_acc', 
#                                   verbose=0, 
#                                   save_best_only=True, 
#                                   mode='max')

#                             # Log the epoch detail into csv
# csv_logger      = CSVLogger(modelname +'.csv')
# callbacks_list  = [checkpoint,csv_logger]

def lrSchedule(epoch):
    """Step-decay learning-rate schedule for LearningRateScheduler.

    Starts at 1e-3 and shrinks the rate in stages as training progresses:
    epochs 0-60 -> 1e-3, 61-80 -> 1e-4, 81-100 -> 1e-5,
    101-110 -> 1e-6, and 111 onward -> 5e-7.
    """
    lr = 1e-3

    # Check the decay boundaries from highest to lowest and apply the
    # first factor whose boundary the current epoch has passed.
    for boundary, factor in ((110, 0.5e-3), (100, 1e-3), (80, 1e-2), (60, 1e-1)):
        if epoch > boundary:
            lr *= factor
            break

    print('Learning rate: ', lr)

    return lr

# Wrap the step-decay schedule so Keras applies it at the start of each epoch.
LRScheduler     = LearningRateScheduler(lrSchedule)

                            # Create checkpoint for the training:
                            # save the model weights only when an epoch
                            # achieves the best validation accuracy so far
filepath        = modelname + ".hdf5"
# NOTE(review): monitoring 'val_acc' matches this TF version's metric naming
# (the training log below reports 'val_acc'); newer TF releases name it
# 'val_accuracy' — confirm before upgrading.
checkpoint      = ModelCheckpoint(filepath, 
                                  monitor='val_acc', 
                                  verbose=0, 
                                  save_best_only=True, 
                                  mode='max')

                            # Log per-epoch loss/accuracy into a csv file
                            # (read back later for the history plots)
csv_logger      = CSVLogger(modelname +'.csv')
callbacks_list  = [checkpoint, csv_logger, LRScheduler]
#callbacks_list  = [checkpoint, csv_logger]
In [14]:
# Fit the model
# This is where the training starts
# model.fit(trDat, 
#           trLbl, 
#           validation_data=(tsDat, tsLbl), 
#           epochs=120, 
#           batch_size=32,
#           callbacks=callbacks_list)

# Data augmentation: random shifts / rotations / zooms / horizontal flips
# applied on the fly, so every epoch sees perturbed copies of the images.
datagen = ImageDataGenerator(width_shift_range=0.25,
                             height_shift_range=0.25,
                             rotation_range=45,
                             zoom_range=0.8,
                             fill_mode='nearest',
                             horizontal_flip=True,
                             vertical_flip=False)

# Train for 120 epochs on augmented batches of 24 images.
# steps_per_epoch must be an integer: the original len(trDat)/24 was a
# float, which made Keras display a fractional step count ("178/177")
# every epoch. math.ceil keeps the same coverage with a proper int.
model.fit_generator(datagen.flow(trDat, trLbl, batch_size=24),
                    validation_data=(tsDat, tsLbl),
                    epochs=120, 
                    verbose=1,
                    steps_per_epoch=math.ceil(len(trDat) / 24),
                    callbacks=callbacks_list)
Learning rate:  0.001
Epoch 1/120
178/177 [==============================] - 75s 419ms/step - loss: 1.8872 - acc: 0.6420 - val_loss: 1.6569 - val_acc: 0.4995
Learning rate:  0.001
Epoch 2/120
178/177 [==============================] - 38s 215ms/step - loss: 1.1870 - acc: 0.7131 - val_loss: 1.2356 - val_acc: 0.5173
Learning rate:  0.001
Epoch 3/120
178/177 [==============================] - 38s 213ms/step - loss: 0.9428 - acc: 0.7336 - val_loss: 0.9938 - val_acc: 0.5783
Learning rate:  0.001
Epoch 4/120
178/177 [==============================] - 37s 208ms/step - loss: 0.8805 - acc: 0.7551 - val_loss: 0.8612 - val_acc: 0.6392
Learning rate:  0.001
Epoch 5/120
178/177 [==============================] - 37s 208ms/step - loss: 0.7804 - acc: 0.7706 - val_loss: 0.7212 - val_acc: 0.8069
Learning rate:  0.001
Epoch 6/120
178/177 [==============================] - 37s 209ms/step - loss: 0.7410 - acc: 0.7789 - val_loss: 0.6815 - val_acc: 0.7816
Learning rate:  0.001
Epoch 7/120
178/177 [==============================] - 37s 210ms/step - loss: 0.7219 - acc: 0.7871 - val_loss: 0.7500 - val_acc: 0.6420
Learning rate:  0.001
Epoch 8/120
178/177 [==============================] - 37s 207ms/step - loss: 0.6823 - acc: 0.7897 - val_loss: 0.6519 - val_acc: 0.7938
Learning rate:  0.001
Epoch 9/120
178/177 [==============================] - 38s 214ms/step - loss: 0.6408 - acc: 0.7968 - val_loss: 0.6977 - val_acc: 0.7179
Learning rate:  0.001
Epoch 10/120
178/177 [==============================] - 37s 208ms/step - loss: 0.6145 - acc: 0.7961 - val_loss: 0.5988 - val_acc: 0.8135
Learning rate:  0.001
Epoch 11/120
178/177 [==============================] - 38s 215ms/step - loss: 0.5868 - acc: 0.8011 - val_loss: 0.5842 - val_acc: 0.7985
Learning rate:  0.001
Epoch 12/120
178/177 [==============================] - 37s 207ms/step - loss: 0.5691 - acc: 0.8092 - val_loss: 0.6075 - val_acc: 0.7610
Learning rate:  0.001
Epoch 13/120
178/177 [==============================] - 37s 207ms/step - loss: 0.5373 - acc: 0.8136 - val_loss: 0.5461 - val_acc: 0.8022
Learning rate:  0.001
Epoch 14/120
178/177 [==============================] - 37s 209ms/step - loss: 0.5236 - acc: 0.8193 - val_loss: 0.4155 - val_acc: 0.8810
Learning rate:  0.001
Epoch 15/120
178/177 [==============================] - 37s 207ms/step - loss: 0.5084 - acc: 0.8174 - val_loss: 0.5597 - val_acc: 0.7919
Learning rate:  0.001
Epoch 16/120
178/177 [==============================] - 38s 213ms/step - loss: 0.4999 - acc: 0.8260 - val_loss: 0.5424 - val_acc: 0.7882
Learning rate:  0.001
Epoch 17/120
178/177 [==============================] - 38s 212ms/step - loss: 0.4816 - acc: 0.8336 - val_loss: 0.4667 - val_acc: 0.8529
Learning rate:  0.001
Epoch 18/120
178/177 [==============================] - 37s 210ms/step - loss: 0.4721 - acc: 0.8360 - val_loss: 0.4098 - val_acc: 0.8735
Learning rate:  0.001
Epoch 19/120
178/177 [==============================] - 38s 214ms/step - loss: 0.4640 - acc: 0.8407 - val_loss: 0.5935 - val_acc: 0.7713
Learning rate:  0.001
Epoch 20/120
178/177 [==============================] - 38s 212ms/step - loss: 0.4638 - acc: 0.8339 - val_loss: 0.4130 - val_acc: 0.8866
Learning rate:  0.001
Epoch 21/120
178/177 [==============================] - 38s 216ms/step - loss: 0.4537 - acc: 0.8349 - val_loss: 0.3800 - val_acc: 0.8866
Learning rate:  0.001
Epoch 22/120
178/177 [==============================] - 37s 210ms/step - loss: 0.4480 - acc: 0.8358 - val_loss: 0.4556 - val_acc: 0.8903
Learning rate:  0.001
Epoch 23/120
178/177 [==============================] - 37s 209ms/step - loss: 0.4281 - acc: 0.8411 - val_loss: 0.4966 - val_acc: 0.8294
Learning rate:  0.001
Epoch 24/120
178/177 [==============================] - 38s 214ms/step - loss: 0.4397 - acc: 0.8371 - val_loss: 0.8313 - val_acc: 0.7657
Learning rate:  0.001
Epoch 25/120
178/177 [==============================] - 38s 213ms/step - loss: 0.4348 - acc: 0.8448 - val_loss: 0.3727 - val_acc: 0.8679
Learning rate:  0.001
Epoch 26/120
178/177 [==============================] - 39s 222ms/step - loss: 0.4276 - acc: 0.8395 - val_loss: 0.4041 - val_acc: 0.8585
Learning rate:  0.001
Epoch 27/120
178/177 [==============================] - 40s 223ms/step - loss: 0.4318 - acc: 0.8415 - val_loss: 0.4010 - val_acc: 0.8857
Learning rate:  0.001
Epoch 28/120
178/177 [==============================] - 38s 211ms/step - loss: 0.4145 - acc: 0.8480 - val_loss: 0.3899 - val_acc: 0.8707
Learning rate:  0.001
Epoch 29/120
178/177 [==============================] - 38s 216ms/step - loss: 0.4032 - acc: 0.8469 - val_loss: 0.3517 - val_acc: 0.8885
Learning rate:  0.001
Epoch 30/120
178/177 [==============================] - 39s 218ms/step - loss: 0.4000 - acc: 0.8573 - val_loss: 0.3942 - val_acc: 0.8857
Learning rate:  0.001
Epoch 31/120
178/177 [==============================] - 38s 213ms/step - loss: 0.4192 - acc: 0.8450 - val_loss: 0.4250 - val_acc: 0.8519
Learning rate:  0.001
Epoch 32/120
178/177 [==============================] - 39s 217ms/step - loss: 0.4075 - acc: 0.8468 - val_loss: 0.3459 - val_acc: 0.8922
Learning rate:  0.001
Epoch 33/120
178/177 [==============================] - 38s 214ms/step - loss: 0.3956 - acc: 0.8538 - val_loss: 0.5880 - val_acc: 0.7245
Learning rate:  0.001
Epoch 34/120
178/177 [==============================] - 39s 220ms/step - loss: 0.4014 - acc: 0.8512 - val_loss: 0.3296 - val_acc: 0.8978
Learning rate:  0.001
Epoch 35/120
178/177 [==============================] - 37s 209ms/step - loss: 0.4016 - acc: 0.8529 - val_loss: 0.3960 - val_acc: 0.8641
Learning rate:  0.001
Epoch 36/120
178/177 [==============================] - 39s 218ms/step - loss: 0.3988 - acc: 0.8590 - val_loss: 0.3124 - val_acc: 0.9053
Learning rate:  0.001
Epoch 37/120
178/177 [==============================] - 37s 210ms/step - loss: 0.3888 - acc: 0.8531 - val_loss: 0.2972 - val_acc: 0.9053
Learning rate:  0.001
Epoch 38/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3864 - acc: 0.8562 - val_loss: 0.3861 - val_acc: 0.8472
Learning rate:  0.001
Epoch 39/120
178/177 [==============================] - 37s 210ms/step - loss: 0.3999 - acc: 0.8570 - val_loss: 0.3314 - val_acc: 0.8997
Learning rate:  0.001
Epoch 40/120
178/177 [==============================] - 38s 215ms/step - loss: 0.3798 - acc: 0.8600 - val_loss: 0.3696 - val_acc: 0.9016
Learning rate:  0.001
Epoch 41/120
178/177 [==============================] - 38s 215ms/step - loss: 0.3974 - acc: 0.8525 - val_loss: 0.3331 - val_acc: 0.8997
Learning rate:  0.001
Epoch 42/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3825 - acc: 0.8612 - val_loss: 0.4356 - val_acc: 0.8313
Learning rate:  0.001
Epoch 43/120
178/177 [==============================] - 39s 216ms/step - loss: 0.3788 - acc: 0.8627 - val_loss: 0.3346 - val_acc: 0.9053
Learning rate:  0.001
Epoch 44/120
178/177 [==============================] - 37s 210ms/step - loss: 0.3691 - acc: 0.8644 - val_loss: 0.3052 - val_acc: 0.9007
Learning rate:  0.001
Epoch 45/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3701 - acc: 0.8709 - val_loss: 0.3231 - val_acc: 0.8960
Learning rate:  0.001
Epoch 46/120
178/177 [==============================] - 38s 213ms/step - loss: 0.3708 - acc: 0.8596 - val_loss: 0.3058 - val_acc: 0.9025
Learning rate:  0.001
Epoch 47/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3819 - acc: 0.8636 - val_loss: 0.3445 - val_acc: 0.8894
Learning rate:  0.001
Epoch 48/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3669 - acc: 0.8663 - val_loss: 0.3395 - val_acc: 0.8875
Learning rate:  0.001
Epoch 49/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3728 - acc: 0.8570 - val_loss: 0.3219 - val_acc: 0.8828
Learning rate:  0.001
Epoch 50/120
178/177 [==============================] - 37s 208ms/step - loss: 0.3623 - acc: 0.8706 - val_loss: 0.3560 - val_acc: 0.8660
Learning rate:  0.001
Epoch 51/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3644 - acc: 0.8704 - val_loss: 0.3584 - val_acc: 0.8800
Learning rate:  0.001
Epoch 52/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3680 - acc: 0.8659 - val_loss: 0.3251 - val_acc: 0.8913
Learning rate:  0.001
Epoch 53/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3732 - acc: 0.8599 - val_loss: 0.3634 - val_acc: 0.8594
Learning rate:  0.001
Epoch 54/120
178/177 [==============================] - 38s 212ms/step - loss: 0.3594 - acc: 0.8687 - val_loss: 0.3370 - val_acc: 0.9025
Learning rate:  0.001
Epoch 55/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3567 - acc: 0.8680 - val_loss: 0.4498 - val_acc: 0.8669
Learning rate:  0.001
Epoch 56/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3792 - acc: 0.8566 - val_loss: 0.3274 - val_acc: 0.9007
Learning rate:  0.001
Epoch 57/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3641 - acc: 0.8599 - val_loss: 0.3864 - val_acc: 0.8472
Learning rate:  0.001
Epoch 58/120
178/177 [==============================] - 38s 213ms/step - loss: 0.3541 - acc: 0.8725 - val_loss: 0.3102 - val_acc: 0.9082
Learning rate:  0.001
Epoch 59/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3506 - acc: 0.8672 - val_loss: 0.3938 - val_acc: 0.8557
Learning rate:  0.001
Epoch 60/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3617 - acc: 0.8643 - val_loss: 1.7044 - val_acc: 0.6879
Learning rate:  0.001
Epoch 61/120
178/177 [==============================] - 38s 212ms/step - loss: 0.3497 - acc: 0.8762 - val_loss: 0.3344 - val_acc: 0.8772
Learning rate:  0.0001
Epoch 62/120
178/177 [==============================] - 38s 216ms/step - loss: 0.3276 - acc: 0.8832 - val_loss: 0.2759 - val_acc: 0.9091
Learning rate:  0.0001
Epoch 63/120
178/177 [==============================] - 38s 212ms/step - loss: 0.3056 - acc: 0.8893 - val_loss: 0.2740 - val_acc: 0.9119
Learning rate:  0.0001
Epoch 64/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3133 - acc: 0.8885 - val_loss: 0.2879 - val_acc: 0.9035
Learning rate:  0.0001
Epoch 65/120
178/177 [==============================] - 37s 208ms/step - loss: 0.3132 - acc: 0.8861 - val_loss: 0.2740 - val_acc: 0.9091
Learning rate:  0.0001
Epoch 66/120
178/177 [==============================] - 37s 211ms/step - loss: 0.3070 - acc: 0.8892 - val_loss: 0.2661 - val_acc: 0.9128
Learning rate:  0.0001
Epoch 67/120
178/177 [==============================] - 38s 213ms/step - loss: 0.3052 - acc: 0.8888 - val_loss: 0.2682 - val_acc: 0.9082
Learning rate:  0.0001
Epoch 68/120
178/177 [==============================] - 37s 210ms/step - loss: 0.3070 - acc: 0.8934 - val_loss: 0.2744 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 69/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3026 - acc: 0.8921 - val_loss: 0.2705 - val_acc: 0.9119
Learning rate:  0.0001
Epoch 70/120
178/177 [==============================] - 38s 212ms/step - loss: 0.3076 - acc: 0.8878 - val_loss: 0.2647 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 71/120
178/177 [==============================] - 37s 208ms/step - loss: 0.3071 - acc: 0.8927 - val_loss: 0.2665 - val_acc: 0.9110
Learning rate:  0.0001
Epoch 72/120
178/177 [==============================] - 37s 208ms/step - loss: 0.3061 - acc: 0.8869 - val_loss: 0.2647 - val_acc: 0.9082
Learning rate:  0.0001
Epoch 73/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3038 - acc: 0.8930 - val_loss: 0.2642 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 74/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2932 - acc: 0.8978 - val_loss: 0.2638 - val_acc: 0.9138
Learning rate:  0.0001
Epoch 75/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2994 - acc: 0.8930 - val_loss: 0.2621 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 76/120
178/177 [==============================] - 37s 211ms/step - loss: 0.2887 - acc: 0.8970 - val_loss: 0.2857 - val_acc: 0.9063
Learning rate:  0.0001
Epoch 77/120
178/177 [==============================] - 37s 210ms/step - loss: 0.3084 - acc: 0.8930 - val_loss: 0.2578 - val_acc: 0.9147
Learning rate:  0.0001
Epoch 78/120
178/177 [==============================] - 37s 208ms/step - loss: 0.3042 - acc: 0.8930 - val_loss: 0.2614 - val_acc: 0.9100
Learning rate:  0.0001
Epoch 79/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2905 - acc: 0.9012 - val_loss: 0.2620 - val_acc: 0.9091
Learning rate:  0.0001
Epoch 80/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2874 - acc: 0.8975 - val_loss: 0.2727 - val_acc: 0.9072
Learning rate:  0.0001
Epoch 81/120
178/177 [==============================] - 37s 210ms/step - loss: 0.2930 - acc: 0.8930 - val_loss: 0.2628 - val_acc: 0.9091
Learning rate:  1e-05
Epoch 82/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2881 - acc: 0.9002 - val_loss: 0.2635 - val_acc: 0.9082
Learning rate:  1e-05
Epoch 83/120
178/177 [==============================] - 38s 211ms/step - loss: 0.2912 - acc: 0.8920 - val_loss: 0.2601 - val_acc: 0.9128
Learning rate:  1e-05
Epoch 84/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2946 - acc: 0.8892 - val_loss: 0.2603 - val_acc: 0.9128
Learning rate:  1e-05
Epoch 85/120
178/177 [==============================] - 38s 211ms/step - loss: 0.3008 - acc: 0.8950 - val_loss: 0.2577 - val_acc: 0.9128
Learning rate:  1e-05
Epoch 86/120
178/177 [==============================] - 37s 209ms/step - loss: 0.3017 - acc: 0.8920 - val_loss: 0.2573 - val_acc: 0.9138
Learning rate:  1e-05
Epoch 87/120
178/177 [==============================] - 38s 213ms/step - loss: 0.2867 - acc: 0.8981 - val_loss: 0.2565 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 88/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2858 - acc: 0.8965 - val_loss: 0.2576 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 89/120
178/177 [==============================] - 38s 214ms/step - loss: 0.2951 - acc: 0.8907 - val_loss: 0.2570 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 90/120
178/177 [==============================] - 38s 214ms/step - loss: 0.2763 - acc: 0.9045 - val_loss: 0.2590 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 91/120
178/177 [==============================] - 38s 211ms/step - loss: 0.2868 - acc: 0.8974 - val_loss: 0.2584 - val_acc: 0.9138
Learning rate:  1e-05
Epoch 92/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2913 - acc: 0.8950 - val_loss: 0.2581 - val_acc: 0.9138
Learning rate:  1e-05
Epoch 93/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2897 - acc: 0.8969 - val_loss: 0.2583 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 94/120
178/177 [==============================] - 37s 210ms/step - loss: 0.2915 - acc: 0.8985 - val_loss: 0.2577 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 95/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2976 - acc: 0.8970 - val_loss: 0.2573 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 96/120
178/177 [==============================] - 37s 210ms/step - loss: 0.2922 - acc: 0.8915 - val_loss: 0.2595 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 97/120
178/177 [==============================] - 38s 211ms/step - loss: 0.2951 - acc: 0.8961 - val_loss: 0.2575 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 98/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2923 - acc: 0.8944 - val_loss: 0.2579 - val_acc: 0.9119
Learning rate:  1e-05
Epoch 99/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2996 - acc: 0.8892 - val_loss: 0.2581 - val_acc: 0.9138
Learning rate:  1e-05
Epoch 100/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2900 - acc: 0.8942 - val_loss: 0.2576 - val_acc: 0.9110
Learning rate:  1e-05
Epoch 101/120
178/177 [==============================] - 38s 213ms/step - loss: 0.2863 - acc: 0.8982 - val_loss: 0.2579 - val_acc: 0.9128
Learning rate:  1e-06
Epoch 102/120
178/177 [==============================] - 38s 213ms/step - loss: 0.2844 - acc: 0.8979 - val_loss: 0.2573 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 103/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2971 - acc: 0.8926 - val_loss: 0.2573 - val_acc: 0.9128
Learning rate:  1e-06
Epoch 104/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2853 - acc: 0.8975 - val_loss: 0.2575 - val_acc: 0.9128
Learning rate:  1e-06
Epoch 105/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2809 - acc: 0.9002 - val_loss: 0.2578 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 106/120
178/177 [==============================] - 38s 216ms/step - loss: 0.2942 - acc: 0.8969 - val_loss: 0.2576 - val_acc: 0.9138
Learning rate:  1e-06
Epoch 107/120
178/177 [==============================] - 39s 216ms/step - loss: 0.2838 - acc: 0.9040 - val_loss: 0.2584 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 108/120
178/177 [==============================] - 38s 212ms/step - loss: 0.2816 - acc: 0.8993 - val_loss: 0.2585 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 109/120
178/177 [==============================] - 38s 213ms/step - loss: 0.2853 - acc: 0.8974 - val_loss: 0.2583 - val_acc: 0.9119
Learning rate:  1e-06
Epoch 110/120
178/177 [==============================] - 38s 212ms/step - loss: 0.2845 - acc: 0.8969 - val_loss: 0.2581 - val_acc: 0.9110
Learning rate:  1e-06
Epoch 111/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2904 - acc: 0.8923 - val_loss: 0.2580 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 112/120
178/177 [==============================] - 38s 211ms/step - loss: 0.2775 - acc: 0.8993 - val_loss: 0.2582 - val_acc: 0.9147
Learning rate:  5e-07
Epoch 113/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2879 - acc: 0.8962 - val_loss: 0.2575 - val_acc: 0.9128
Learning rate:  5e-07
Epoch 114/120
178/177 [==============================] - 38s 212ms/step - loss: 0.2809 - acc: 0.8983 - val_loss: 0.2577 - val_acc: 0.9119
Learning rate:  5e-07
Epoch 115/120
178/177 [==============================] - 37s 210ms/step - loss: 0.2898 - acc: 0.8972 - val_loss: 0.2579 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 116/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2846 - acc: 0.8969 - val_loss: 0.2577 - val_acc: 0.9119
Learning rate:  5e-07
Epoch 117/120
178/177 [==============================] - 37s 208ms/step - loss: 0.2806 - acc: 0.9007 - val_loss: 0.2575 - val_acc: 0.9128
Learning rate:  5e-07
Epoch 118/120
178/177 [==============================] - 37s 210ms/step - loss: 0.2917 - acc: 0.8968 - val_loss: 0.2580 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 119/120
178/177 [==============================] - 37s 210ms/step - loss: 0.2807 - acc: 0.9040 - val_loss: 0.2579 - val_acc: 0.9110
Learning rate:  5e-07
Epoch 120/120
178/177 [==============================] - 37s 209ms/step - loss: 0.2796 - acc: 0.9003 - val_loss: 0.2579 - val_acc: 0.9119
Out[14]:
<tensorflow.python.keras.callbacks.History at 0x1ea2b39aa58>
In [15]:
# Training is complete: restore the best checkpoint into the evaluation
# model and compile it so predict/evaluate can run.
# NOTE(review): modelGo is presumably a clean copy of the same architecture
# built in an earlier (out-of-view) cell — confirm it matches `model`.
modelGo.load_weights(filepath)
modelGo.compile(loss='categorical_crossentropy', 
                optimizer='adam', 
                metrics=['accuracy'])
In [16]:
# Run the restored best model on the test dataset.
predicts    = modelGo.predict(tsDat)

# Collapse the per-class softmax outputs (and the one-hot test labels)
# to integer class indices, as sklearn's metric functions expect.
predout     = np.argmax(predicts,axis=1)
testout     = np.argmax(tsLbl,axis=1)
labelname   = ['non-flower', 'flower']
                                            # the labels for the classification report


# Overall accuracy and the 2x2 confusion matrix (rows: true, cols: predicted).
testScores  = metrics.accuracy_score(testout,predout)
confusion   = metrics.confusion_matrix(testout,predout)


print("Best accuracy (on testing dataset): %.2f%%" % (testScores*100))
print(metrics.classification_report(testout,predout,target_names=labelname,digits=4))
print(confusion)
Best accuracy (on testing dataset): 91.47%
              precision    recall  f1-score   support

  non-flower     0.9007    0.8987    0.8997       454
      flower     0.9251    0.9266    0.9258       613

    accuracy                         0.9147      1067
   macro avg     0.9129    0.9126    0.9128      1067
weighted avg     0.9147    0.9147    0.9147      1067

[[408  46]
 [ 45 568]]
In [17]:
import pandas as pd

# Plot the training history recorded by CSVLogger:
# loss curves in the top panel, accuracy curves below.
# NOTE: lines were previously unlabeled — legends added so the
# figure can be read without consulting the code.
records     = pd.read_csv(modelname + '.csv')

plt.figure()

plt.subplot(211)
plt.plot(records['val_loss'], label='validation')
plt.plot(records['loss'], label='training')
plt.yticks([0, 0.20, 0.30, 0.4, 0.5])
plt.title('Loss value', fontsize=12)
plt.legend()

ax          = plt.gca()
ax.set_xticklabels([])          # epoch axis is repeated on the bottom panel

plt.subplot(212)
plt.plot(records['val_acc'], label='validation')
plt.plot(records['acc'], label='training')
plt.yticks([0.7, 0.8, 0.9, 1.0])
plt.title('Accuracy', fontsize=12)
plt.xlabel('Epoch')
plt.legend()
plt.show()
In [18]:
# Collect every test-set index where the predicted class
# disagrees with the ground-truth label.
wrong_ans_index = [i for i in range(len(predout)) if predout[i] != testout[i]]
In [19]:
wrong_ans_index = list(set(wrong_ans_index))
In [ ]:
# Display every misclassified test image so we can eyeball
# what kinds of pictures the network gets wrong.

dataset = tsDatOrg      # un-preprocessed test images for display

for index in wrong_ans_index:
    print("Showing %s index image" %(index))
    print("Predicted as %s but is actually %s" %(predout[index], testout[index]))
    # BUG FIX: was `data[index]` — a stale variable left over in the kernel;
    # on a fresh Restart-&-Run-All that raises NameError. Use the `dataset`
    # assigned above instead.
    imgplot = plt.imshow(dataset[index])
    plt.show()
Showing 3 index image
Predicted as 0 but is actually 1
Showing 12 index image
Predicted as 0 but is actually 1
Showing 1049 index image
Predicted as 1 but is actually 0
Showing 539 index image
Predicted as 0 but is actually 1
Showing 540 index image
Predicted as 0 but is actually 1
Showing 1054 index image
Predicted as 1 but is actually 0
Showing 1056 index image
Predicted as 1 but is actually 0
Showing 545 index image
Predicted as 0 but is actually 1
Showing 556 index image
Predicted as 0 but is actually 1
Showing 50 index image
Predicted as 0 but is actually 1
Showing 72 index image
Predicted as 0 but is actually 1
Showing 75 index image
Predicted as 0 but is actually 1
Showing 599 index image
Predicted as 0 but is actually 1
Showing 92 index image
Predicted as 0 but is actually 1
Showing 615 index image
Predicted as 1 but is actually 0
Showing 616 index image
Predicted as 1 but is actually 0
Showing 110 index image
Predicted as 0 but is actually 1
Showing 113 index image
Predicted as 0 but is actually 1
Showing 131 index image
Predicted as 0 but is actually 1
Showing 645 index image
Predicted as 1 but is actually 0
Showing 137 index image
Predicted as 0 but is actually 1
Showing 141 index image
Predicted as 0 but is actually 1
Showing 147 index image
Predicted as 0 but is actually 1
Showing 148 index image
Predicted as 0 but is actually 1
Showing 662 index image
Predicted as 1 but is actually 0
Showing 675 index image
Predicted as 1 but is actually 0
Showing 176 index image
Predicted as 0 but is actually 1
Showing 183 index image
Predicted as 0 but is actually 1
Showing 695 index image
Predicted as 1 but is actually 0
Showing 188 index image
Predicted as 0 but is actually 1
Showing 712 index image
Predicted as 1 but is actually 0
Showing 204 index image
Predicted as 0 but is actually 1
Showing 205 index image
Predicted as 0 but is actually 1
Showing 720 index image
Predicted as 1 but is actually 0
Showing 220 index image
Predicted as 0 but is actually 1
Showing 733 index image
Predicted as 1 but is actually 0
Showing 743 index image
Predicted as 1 but is actually 0
Showing 233 index image
Predicted as 0 but is actually 1
Showing 752 index image
Predicted as 1 but is actually 0
Showing 775 index image
Predicted as 1 but is actually 0
Showing 269 index image
Predicted as 0 but is actually 1
Showing 790 index image
Predicted as 1 but is actually 0
Showing 279 index image
Predicted as 0 but is actually 1
Showing 791 index image
Predicted as 1 but is actually 0
Showing 792 index image
Predicted as 1 but is actually 0
Showing 794 index image
Predicted as 1 but is actually 0
Showing 796 index image
Predicted as 1 but is actually 0
Showing 288 index image
Predicted as 0 but is actually 1
Showing 805 index image
Predicted as 1 but is actually 0
Showing 300 index image
Predicted as 0 but is actually 1
In [ ]:
# TODO: try ensembling (stacking) 3 separately trained networks to improve test accuracy